// Processing code
// Libraries used:
// - Super Fast Blur v1.1 by Mario Klingemann
// - BlobDetection
// - dsj (DirectShow for Java) for HD video capture on Windows
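//
// Each captured frame is downsampled to a small 80x60 image, blurred with
// fastblur() to reduce noise, and passed to BlobDetection; the detected
// bright blobs and their edges are then drawn over the live video.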
import processing.video.*;
import blobDetection.*;
import de.humatic.dsj.*;
import java.awt.image.BufferedImage;
DCapture cap;
Capture cam;
BlobDetection theBlobDetection;
PImage img;
boolean newFrame=false;
// ==================================================
// setup()
// ==================================================
void setup()
{
// Size of the display window (matches the 1080p capture)
size(1920, 1080);
background(0);
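// Open the first DirectShow capture device via the DCapture helper class below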
cap = new DCapture();
// Alternative: Processing's built-in Capture class (unused here)
//cam = new Capture(this, 1920, 1080, 30);
//cam.start(); // start() is only needed in Processing 2.0+; omit it in Processing 1.5
// BlobDetection
// img is the small image sent to the detector (a downsampled copy of the camera frame)
img = new PImage(80,60);
theBlobDetection = new BlobDetection(img.width, img.height);
theBlobDetection.setPosDiscrimination(true);
theBlobDetection.setThreshold(0.2f); // detect bright areas whose luminosity is greater than 0.2
}
// ==================================================
// captureEvent()
// ==================================================
void captureEvent(Capture cam)
{
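// Only fires if the Processing Capture path (cam) in setup() is enabled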
cam.read();
newFrame = true;
}
// ==================================================
// draw()
// ==================================================
void draw()
{
  // Grab the current frame from the DirectShow capture and display it
  PImage frame = cap.updateImage();
  image(frame, 0, 0, cap.width, cap.height);
  // Downsample the frame into the small detection image, blur it to reduce
  // noise, then compute and draw the blobs
  img.copy(frame, 0, 0, frame.width, frame.height,
           0, 0, img.width, img.height);
  fastblur(img, 2);
  theBlobDetection.computeBlobs(img.pixels);
  drawBlobsAndEdges(true, true);
  // The newFrame flag set in captureEvent() is only relevant when using
  // Processing's Capture class instead of DCapture
}
// ==================================================
// drawBlobsAndEdges()
// ==================================================
void drawBlobsAndEdges(boolean drawBlobs, boolean drawEdges)
{
  noFill();
  Blob b;
  EdgeVertex eA, eB;
  for (int n=0 ; n<theBlobDetection.getBlobNb() ; n++)
  {
    b = theBlobDetection.getBlob(n);
    if (b != null)
    {
      // Edges
      if (drawEdges)
      {
        strokeWeight(3);
        stroke(0, 255, 0);
        for (int m=0 ; m<b.getEdgeNb() ; m++)
        {
          eA = b.getEdgeVertexA(m);
          eB = b.getEdgeVertexB(m);
          if (eA != null && eB != null)
            line(eA.x*width, eA.y*height, eB.x*width, eB.y*height);
        }
      }
      // Blobs
      if (drawBlobs)
      {
        strokeWeight(1);
        stroke(255, 0, 0);
        rect(b.xMin*width, b.yMin*height, b.w*width, b.h*height);
      }
    }
  }
}
// ==================================================
// fastblur() - Super Fast Blur v1.1 by Mario Klingemann
// ==================================================
void fastblur(PImage img, int radius)
{
  // Two-pass box blur (horizontal, then vertical) using running sums and a
  // precomputed division table
  if (radius < 1) {
    return;
  }
  int w = img.width;
  int h = img.height;
  int wm = w - 1;
  int hm = h - 1;
  int wh = w * h;
  int div = radius + radius + 1;
  int r[] = new int[wh];
  int g[] = new int[wh];
  int b[] = new int[wh];
  int rsum, gsum, bsum, x, y, i, p, p1, p2, yp, yi, yw;
  int vmin[] = new int[max(w, h)];
  int vmax[] = new int[max(w, h)];
  int[] pix = img.pixels;
  int dv[] = new int[256*div];
  for (i = 0; i < 256*div; i++) {
    dv[i] = (i/div);
  }
  yw = yi = 0;
  // Horizontal pass
  for (y = 0; y < h; y++) {
    rsum = gsum = bsum = 0;
    for (i = -radius; i <= radius; i++) {
      p = pix[yi + min(wm, max(i, 0))];
      rsum += (p & 0xff0000) >> 16;
      gsum += (p & 0x00ff00) >> 8;
      bsum += p & 0x0000ff;
    }
    for (x = 0; x < w; x++) {
      r[yi] = dv[rsum];
      g[yi] = dv[gsum];
      b[yi] = dv[bsum];
      if (y == 0) {
        vmin[x] = min(x + radius + 1, wm);
        vmax[x] = max(x - radius, 0);
      }
      p1 = pix[yw + vmin[x]];
      p2 = pix[yw + vmax[x]];
      rsum += ((p1 & 0xff0000) - (p2 & 0xff0000)) >> 16;
      gsum += ((p1 & 0x00ff00) - (p2 & 0x00ff00)) >> 8;
      bsum += (p1 & 0x0000ff) - (p2 & 0x0000ff);
      yi++;
    }
    yw += w;
  }
  // Vertical pass
  for (x = 0; x < w; x++) {
    rsum = gsum = bsum = 0;
    yp = -radius*w;
    for (i = -radius; i <= radius; i++) {
      yi = max(0, yp) + x;
      rsum += r[yi];
      gsum += g[yi];
      bsum += b[yi];
      yp += w;
    }
    yi = x;
    for (y = 0; y < h; y++) {
      pix[yi] = 0xff000000 | (dv[rsum] << 16) | (dv[gsum] << 8) | dv[bsum];
      if (x == 0) {
        vmin[y] = min(y + radius + 1, hm)*w;
        vmax[y] = max(y - radius, 0)*w;
      }
      p1 = x + vmin[y];
      p2 = x + vmax[y];
      rsum += r[p1] - r[p2];
      gsum += g[p1] - g[p2];
      bsum += b[p1] - b[p2];
      yi += w;
    }
  }
}
// Processing code - DCapture helper class for dsj (DirectShow for Java)
import de.humatic.dsj.*;
import java.awt.image.BufferedImage;
class DCapture implements java.beans.PropertyChangeListener {
private DSCapture capture;
public int width, height;
DCapture() {
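// Query the available DirectShow devices and open the first video capture
// device (dsi[0][0]) without audio, with this object as the event listener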
DSFilterInfo[][] dsi = DSCapture.queryDevices();
capture = new DSCapture(DSFiltergraph.DD7, dsi[0][0], false,
DSFilterInfo.doNotRender(), this);
width = capture.getDisplaySize().width;
height = capture.getDisplaySize().height;
}
public PImage updateImage() {
  // Copy the current DirectShow frame into a Processing image
  PImage img = createImage(width, height, RGB);
  img.loadPixels();
  BufferedImage bimg = capture.getImage();
  // Transfer the frame's RGB pixel data row by row into the PImage pixel array
  bimg.getRGB(0, 0, img.width, img.height, img.pixels, 0, img.width);
  img.updatePixels();
  return img;
}
public void propertyChange(java.beans.PropertyChangeEvent e) {
switch (DSJUtils.getEventType(e)) {
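// Capture-graph events are not handled in this sketch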
}
}
}